The Video Source Extension

The video source extension, SGIX_video_source, lets you source pixel data from a video stream to the OpenGL renderer. The video source extension is available only for system configurations that have direct hardware paths from the video hardware to the graphics accelerator. On other systems, you need to transfer video data to host memory and then call glDrawPixels() or glTex{Sub}Image() to transfer data to the framebuffer or to texture memory.
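
For example, a minimal sketch of this host-memory fallback (not part of the extension) might look like the following; videoData, videoWidth, and videoHeight are hypothetical placeholders for a field that has already been captured into host memory, and the code assumes an orthographic projection that maps window coordinates one-to-one, as in Example 9-1:

/*
 * Draw one video field that has already been transferred to host
 * memory.  videoData, videoWidth, and videoHeight are placeholders.
 */
void DrawVideoFromHostMemory(const GLubyte *videoData,
                             GLsizei videoWidth, GLsizei videoHeight)
{
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    /* Video lines arrive top to bottom; flip them for OpenGL. */
    glRasterPos2i(0, videoHeight - 1);
    glPixelZoom(1, -1);
    glDrawPixels(videoWidth, videoHeight,
                 GL_RGBA, GL_UNSIGNED_BYTE, videoData);
}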

The video source extension introduces a new type of GLXDrawable--GLXVideoSourceSGIX--that is associated with the drain node of a Video Library (VL) path. A GLXVideoSourceSGIX drawable can only be used as the read parameter to glXMakeCurrentReadSGI() to indicate that pixel data should be read from the specified video source instead of the framebuffer.
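
In outline, an application creates a video source from an existing VL path and binds it as the read drawable. The following sketch shows the basic call sequence with error checking omitted; svr, path, and drn are assumed to be a VL server, path, and graphics drain node set up as in Example 9-1, dpy is the X display connection, and win and ctx are an ordinary GLX window and context:

GLXVideoSourceSGIX source;

/* Associate a GLX video source with the drain node of the VL path. */
source = glXCreateGLXVideoSourceSGIX(dpy, DefaultScreen(dpy), svr,
                                     path, VL_GFX, drn);

/* Draw to the window, but read pixels from the video stream. */
glXMakeCurrentReadSGI(dpy, win, source, ctx);

/* Pixel copy operations such as glCopyPixels() now take their source
 * data from the video stream rather than from the framebuffer. */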

Caution: This extension is an SGIX (experimental) extension. The interface may change, or it may not be supported in future releases.

The remainder of this section presents two examples. Example 9-1 demonstrates the video-to-graphics capability of the Sirius Video board using OpenGL. Example 9-2 is a code fragment that shows how to use the video source extension to load video into texture memory.

Example 9-1 : Use of the Video Source Extension

/*
 * vidtogfx.c
 *  This VL program demonstrates the Sirius Video board video->graphics
 *  ability using OpenGL.
 *  The video arrives as fields of an interlaced format.  It is 
 *  displayed either by interlacing the previous and the current 
 *  field or by pixel-zooming the field in Y by 2.
 */
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <vl/vl.h>
#include <vl/dev_sirius.h>
#include <GL/glx.h>
#include "xwindow.h"
#include <X11/keysym.h>

/* Video path variables */
VLServer svr;
VLPath path;
VLNode src;
VLNode drn;
/* Video frame size info */
VLControlValue size;

int F1_is_first;                /* Which field is first */

/* OpenGL/X variables */
Display *dpy;
Window window;
GLXVideoSourceSGIX glxVideoSource;
GLXContext ctx;
GLboolean interlace = GL_FALSE;
GLboolean hasInterlace = GL_FALSE;   /* GL_SGIX_interlace supported? */
/*
 * function prototypes
 */
void usage(char *, int);
void InitGfx(int, char **);
void GrabField(int);
void UpdateTiming(void);
void cleanup(void);
void ProcessVideoEvents(void);
static void loop(void);
int
main(int argc, char **argv)
{
    int         c, insrc = VL_ANY;
    int         device = VL_ANY;
    short       dev, val;
    /* open connection to VL server */

    if (!(svr = vlOpenVideo(""))) {
        printf("couldn't open connection to VL server\n");
        exit(EXIT_FAILURE);
    }

    /* Get the Video input */
    src = vlGetNode(svr, VL_SRC, VL_VIDEO, insrc);
    /* Get the first Graphics output */
    drn = vlGetNode(svr, VL_DRN, VL_GFX, 0);

    /* Create path   */
    path = vlCreatePath(svr, device, src, drn);
    if (path < 0) {
        vlPerror("vlCreatePath");
        exit(EXIT_FAILURE);
    }
    /* Setup path */
    if (vlSetupPaths(svr, (VLPathList)&path, 1, VL_SHARE, 
                           VL_SHARE) < 0) {
        vlPerror("vlSetupPaths");
        exit(EXIT_FAILURE);
    }
    UpdateTiming();
    if (vlSelectEvents(svr, path,VLStreamPreemptedMask |
                            VLControlChangedMask ) < 0) {
            vlPerror("Select Events");
            exit(EXIT_FAILURE);
    }
    /* Open the GL window for gfx transfers */
    InitGfx(argc, argv);
    /* Begin Transfers */
    vlBeginTransfer(svr, path, 0, NULL);
    /* The following sequence grabs each field and displays it in
     * the GL window.
     */
    loop();
}
void
loop()
{
  XEvent event;
  KeySym key;
  XComposeStatus compose;
  GLboolean clearNeeded = GL_FALSE;

  while (GL_TRUE) {
    /* Process X events */
    while(XPending(dpy)) {
      XNextEvent(dpy, &event);
      /* Don't really need to handle expose as video is coming at
       * refresh speed.
       */
      if (event.type == KeyPress) {
        XLookupString(&event.xkey, NULL, 0, &key, &compose);
        switch (key) {
         case XK_Escape:
          cleanup();            /* end the transfer, close VL, and exit */
          break;
         case XK_i:
          if (hasInterlace) {
            interlace = !interlace;
            if (!interlace) {
              if (!glXMakeCurrentReadSGI(dpy, window,
                                         glxVideoSource, ctx)) {
                fprintf(stderr,
                        "Can't make current to video\n");
                exit(EXIT_FAILURE);
              }
            } else if (!glXMakeCurrent(dpy, window, ctx)) {
              fprintf(stderr,
                      "Can't make window current to context\n");
              exit(EXIT_FAILURE);
            }
            printf("Interlace is %s\n", interlace ? "On" : "Off");
            /* Clear both buffers */
            glClear(GL_COLOR_BUFFER_BIT);
            glXSwapBuffers(dpy, window);
            glClear(GL_COLOR_BUFFER_BIT);
            glXSwapBuffers(dpy, window);
            glRasterPos2f(0, size.xyVal.y - 1);
          } else {
            printf("Graphics interlacing is not supported\n");
          }
          break;
        }
      }
    }
    ProcessVideoEvents();
    GrabField(0);
    glXSwapBuffers(dpy, window);
    GrabField(1);
    glXSwapBuffers(dpy, window);
  }
}

/*
 * Open an X window of appropriate size and create context.
 */
void
InitGfx(int argc, char **argv)
{
  int i;
  XSizeHints hints;
  int visualAttr[] = {GLX_RGBA, GLX_DOUBLEBUFFER, GLX_RED_SIZE, 12,
                      GLX_GREEN_SIZE, 12, GLX_BLUE_SIZE, 12,
                      None};
  const char *extensions;

  /* Set hints so window size is exactly as the video frame size */
  hints.x = 50; hints.y = 0;
  hints.min_aspect.x = hints.max_aspect.x = size.xyVal.x;
  hints.min_aspect.y = hints.max_aspect.y = size.xyVal.y;
  hints.min_width = size.xyVal.x;
  hints.max_width = size.xyVal.x;
  hints.base_width = hints.width = size.xyVal.x;
  hints.min_height = size.xyVal.y;
  hints.max_height = size.xyVal.y;
  hints.base_height = hints.height = size.xyVal.y;
  hints.flags = USSize | PAspect | USPosition | PMinSize | PMaxSize;
  createWindowAndContext(&dpy, &window, &ctx, 50, 0, size.xyVal.x,
                  size.xyVal.y, GL_FALSE, &hints, visualAttr, argv[0]);
    
  /* Verify that MakeCurrentRead and VideoSource are supported */
  ....
  glxVideoSource = glXCreateGLXVideoSourceSGIX(dpy, 0, svr, path,
                                               VL_GFX, drn);
  if (glxVideoSource == NULL) {
    fprintf(stderr, "Can't create glxVideoSource\n");
    exit(EXIT_FAILURE);
  }
  if (!glXMakeCurrentReadSGI(dpy, window, glxVideoSource, ctx)) {
    fprintf(stderr, "Can't make current to video\n");
    exit(EXIT_FAILURE);
  }
  /* Set up the viewport according to the video frame size */
  glLoadIdentity();
  glViewport(0, 0, size.xyVal.x, size.xyVal.y);
  glOrtho(0, size.xyVal.x, 0, size.xyVal.y, -1, 1);
  /* Video is top to bottom */
  glPixelZoom(1, -2);
  glRasterPos2f(0, size.xyVal.y - 1);
  glReadBuffer(GL_FRONT);
  /* Check whether the interlace extension (GL_SGIX_interlace) is supported. */
  hasInterlace = ... /* GL_TRUE if GL_SGIX_interlace is supported */
}
/*
 * Grab a field: odd_field is 1 for the odd field, 0 for the even field.
 * Use the global F1_is_first variable to determine how to
 * interleave the fields.
 */
void
GrabField(int odd_field)
{
  /* copy pixels from front to back buffer */
  if (interlace) {
    /* Restore zoom and transfer mode */
    glRasterPos2i(0, 0);
    glPixelZoom(1, 1);
    glCopyPixels(0, 0, size.xyVal.x, size.xyVal.y, GL_COLOR);

    /* Copy the field from Sirius Video to GFX subsystem */
    if (!glXMakeCurrentReadSGI(dpy, window, glxVideoSource, ctx)) {
      fprintf(stderr, "Can't make current to video\n");
      exit(EXIT_FAILURE);
    }
    if (odd_field) {
      if (F1_is_first) {
        /* F1 dominant, so odd field is first. */
        glRasterPos2f(0, size.xyVal.y - 1);
      } else {
        /* F2 dominant, so even field is first. */
        glRasterPos2f(0, size.xyVal.y - 2);
      }
    } else {
      if (F1_is_first) {
        /* F1 dominant, so odd field is first. */
        glRasterPos2f(0, size.xyVal.y - 2);
      } else {
        /* F2 dominant, so even field is first. */
        glRasterPos2f(0, size.xyVal.y - 1);
      }
    }
#ifdef GL_SGIX_interlace
    if (hasInterlace)
      glEnable(GL_INTERLACE_SGIX);
#endif
    /* video is upside down relative to graphics */
    glPixelZoom(1, -1);
    glCopyPixels(0, 0, size.xyVal.x, size.xyVal.y/2, GL_COLOR);
    if (!glXMakeCurrent(dpy, window, ctx)) {
      fprintf(stderr, "Can't make current to original window\n");
      exit(EXIT_FAILURE);
    }
#ifdef GL_SGIX_interlace
    if (hasInterlace)
      glDisable(GL_INTERLACE_SGIX);
#endif
  } else { 
    /* Not deinterlacing */
    glPixelZoom(1, -2);
    if (!odd_field) {
      if (!F1_is_first) {
        /* F2 dominant, so even field is first. */
        glRasterPos2f(0, size.xyVal.y - 1);
      } else {
        /* F1 dominant, so odd field is first. */
        glRasterPos2f(0, size.xyVal.y - 2);
      }
    } else {
      if (!F1_is_first) {
        /* F2 dominant, so even field is first. */
        glRasterPos2f(0, size.xyVal.y - 2);
      } else {
        /* F1 dominant, so odd field is first. */
        glRasterPos2f(0, size.xyVal.y - 1);
      }
    }

    glCopyPixels(0, 0, size.xyVal.x, size.xyVal.y/2, GL_COLOR);
  }
}

/*
 * Get video timing info.
 */
void
UpdateTiming(void)
{
  int is_525;
  VLControlValue timing, dominance;

  /* Get the timing on selected input node */
  if (vlGetControl(svr, path, src, VL_TIMING, &timing) <0) {
    vlPerror("VlGetControl:TIMING");
    exit(EXIT_FAILURE);
  }
  /* Set the GFX Drain to the same timing as input src */
  if (vlSetControl(svr, path, drn, VL_TIMING, &timing) <0) {
    vlPerror("VlSetControl:TIMING");
    exit(EXIT_FAILURE);
  }
  if (vlGetControl(svr, path, drn, VL_SIZE, &size) <0) {
    vlPerror("VlGetControl");
    exit(EXIT_FAILURE);
  }
  /*
   * Read the video source's field dominance control setting and 
   * timing, then set a variable to indicate which field has the first 
   * line, so that we know how to interleave fields to frames.
   */
  if (vlGetControl(svr, path, src,
                   VL_SIR_FIELD_DOMINANCE, &dominance) < 0) {
    vlPerror("GetControl(VL_SIR_FIELD_DOMINANCE) on video source 
                                                        failed");
    exit(EXIT_FAILURE);
  }

  is_525 = ( (timing.intVal == VL_TIMING_525_SQ_PIX) ||
             (timing.intVal == VL_TIMING_525_CCIR601) );

  switch (dominance.intVal) {
    case SIR_F1_IS_DOMINANT:
      if (is_525) {
        F1_is_first = 0;
      } else {
        F1_is_first = 1;
      }
      break;
    case SIR_F2_IS_DOMINANT:
      if (is_525) {
        F1_is_first = 1;
      } else {
        F1_is_first = 0;
      }
      break;
  }
}

void
cleanup(void)
{
  vlEndTransfer(svr, path);
  vlDestroyPath(svr, path);
  vlCloseVideo(svr);
  exit(EXIT_SUCCESS);
}

void
ProcessVideoEvents(void)
{
  VLEvent ev;

  if (vlCheckEvent(svr, VLControlChangedMask|
                   VLStreamPreemptedMask, &ev) == -1) {
    return;
  }
  switch(ev.reason) {
    case VLStreamPreempted:
      cleanup();
      exit(EXIT_SUCCESS);
    case VLControlChanged:
      switch(ev.vlcontrolchanged.type) {
        case VL_TIMING:
        case VL_SIZE:
        case VL_SIR_FIELD_DOMINANCE:
          UpdateTiming();
          /* change the gl window size */
          XResizeWindow(dpy, window, size.xyVal.x, size.xyVal.y);
          glXWaitX();
          glLoadIdentity();
          glViewport(0, 0, size.xyVal.x, size.xyVal.y );
          glOrtho(0, size.xyVal.x, 0, size.xyVal.y, -1, 1);
          break;
        default:
          break;
      }
      break;
    default:
      break;
  }
}

Example 9-2 : Loading Video Into Texture Memory

Display *dpy;
Window win;
GLXContext cx;
VLControlValue size, texctl;
int tex_width, tex_height;
VLServer svr;
VLPath path;
VLNode src, drn;

static void init_video_texturing(void)
{
    GLXVideoSourceSGIX videosource;
    GLenum intfmt;
    int scrn = DefaultScreen(dpy);
    float s_scale, t_scale;

    /* set video drain to texture memory */
    drn = vlGetNode(svr, VL_DRN, VL_TEXTURE, 0);

    /* assume svr, src, and path have been initialized as usual */

    /* get the active video area */
    if (vlGetControl(svr, path, src, VL_SIZE, &size) < 0) {
        vlPerror("vlGetControl");
    }
    /* use a texture size that will hold all of the video area */
    /* for simplicity, this handles only 1024x512 or 1024x1024 */

    tex_width = 1024;
    if (size.xyVal.y > 512) {
        tex_height = 1024;
    } else {
        tex_height = 512;
    }
    /* Set up a texture matrix so that texture coords in 0 to 1    */
    /* range will map to the active video area.  We want           */
    /* s' = s * s_scale                                            */
    /* t' = (1-t) * t_scale  (because video is upside down).       */
    s_scale = size.xyVal.x / (float)tex_width;
    t_scale = size.xyVal.y / (float)tex_height;
    glMatrixMode(GL_TEXTURE);
    glLoadIdentity();
    /* The scale is applied to the coordinates first, then the translation. */
    glTranslatef(0, t_scale, 0);
    glScalef(s_scale, -t_scale, 1);

    /* choose video packing mode */
    texctl.intVal = SIR_TEX_PACK_RGBA_8;
    if (vlSetControl(svr, path, drn, VL_PACKING, &texctl) <0) {
        vlPerror("VlSetControl");
    }
    /* choose internal texture format; must match video packing mode */
    intfmt = GL_RGBA8_EXT;

    glEnable(GL_TEXTURE_2D);
    /* use a non-mipmap minification filter */
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    /* use NULL texture image, so no image has to be sent from host */
    glTexImage2D(GL_TEXTURE_2D, 0, intfmt, tex_width, tex_height, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, NULL);
    
    if ((videosource = glXCreateGLXVideoSourceSGIX(dpy, scrn, svr,
                                     path, VL_TEXTURE, drn)) == None) {
        fprintf(stderr, "can't create video source\n");
        exit(1);
    }
    glXMakeCurrentReadSGI(dpy, win, videosource, cx);
}

static void draw(void)
{
    /* load video into texture memory */
    glCopyTexSubImage2DEXT(GL_TEXTURE_2D, 0, 0, 0, 0, 0,
                           size.xyVal.x, size.xyVal.y);

    /* draw the video frame */
    glBegin(GL_POLYGON);
    glTexCoord2f(0,0); glVertex2f(0, 0);
    glTexCoord2f(1,0); glVertex2f(size.xyVal.x, 0);
    glTexCoord2f(1,1); glVertex2f(size.xyVal.x, size.xyVal.y);
    glTexCoord2f(0,1); glVertex2f(0, size.xyVal.y);
    glEnd();
    
}
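
When the application no longer needs the video source, it can be released with glXDestroyGLXVideoSourceSGIX() and the VL path shut down. The following teardown sketch is an assumed cleanup order, not part of the fragment above; it takes the video source handle as a parameter because videosource is local to init_video_texturing() as written:

static void cleanup_video_texturing(GLXVideoSourceSGIX videosource)
{
    /* Detach the video source by restoring the window as the
     * read drawable, then destroy it. */
    glXMakeCurrent(dpy, win, cx);
    glXDestroyGLXVideoSourceSGIX(dpy, videosource);

    /* Shut down the video path. */
    vlEndTransfer(svr, path);
    vlDestroyPath(svr, path);
    vlCloseVideo(svr);
}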

New Functions

The SGIX_video_source extension introduces the functions glXCreateGLXVideoSourceSGIX() and glXDestroyGLXVideoSourceSGIX().
